three-stdlib

stand-alone library of three.js examples

GLTFExporter.cjs.map · 1 line · 125 kB (source map with the original GLTFExporter.js embedded in sourcesContent)
{"version":3,"file":"GLTFExporter.cjs","sources":["../../src/exporters/GLTFExporter.js"],"sourcesContent":["import {\n REVISION,\n BufferAttribute,\n ClampToEdgeWrapping,\n Color,\n DoubleSide,\n InterpolateDiscrete,\n InterpolateLinear,\n LinearFilter,\n LinearMipmapLinearFilter,\n LinearMipmapNearestFilter,\n MathUtils,\n Matrix4,\n MirroredRepeatWrapping,\n NearestFilter,\n NearestMipmapLinearFilter,\n NearestMipmapNearestFilter,\n PropertyBinding,\n RGBAFormat,\n RepeatWrapping,\n Scene,\n Texture,\n CompressedTexture,\n Vector3,\n PlaneGeometry,\n ShaderMaterial,\n Uniform,\n Mesh,\n PerspectiveCamera,\n WebGLRenderer,\n} from 'three'\nimport { version } from '../_polyfill/constants'\n\nasync function readAsDataURL(blob) {\n const buffer = await blob.arrayBuffer()\n const data = btoa(String.fromCharCode(...new Uint8Array(buffer)))\n return `data:${blob.type || ''};base64,${data}`\n}\n\nlet _renderer\nlet fullscreenQuadGeometry\nlet fullscreenQuadMaterial\nlet fullscreenQuad\n\nfunction decompress(texture, maxTextureSize = Infinity, renderer = null) {\n if (!fullscreenQuadGeometry) fullscreenQuadGeometry = new PlaneGeometry(2, 2, 1, 1)\n if (!fullscreenQuadMaterial)\n fullscreenQuadMaterial = new ShaderMaterial({\n uniforms: { blitTexture: new Uniform(texture) },\n vertexShader: /* glsl */ `\n varying vec2 vUv;\n void main(){\n vUv = uv;\n gl_Position = vec4(position.xy * 1.0,0.,.999999);\n }\n `,\n fragmentShader: /* glsl */ `\n uniform sampler2D blitTexture; \n varying vec2 vUv;\n\n void main(){ \n gl_FragColor = vec4(vUv.xy, 0, 1);\n \n #ifdef IS_SRGB\n gl_FragColor = LinearTosRGB( texture2D( blitTexture, vUv) );\n #else\n gl_FragColor = texture2D( blitTexture, vUv);\n #endif\n }\n `,\n })\n\n fullscreenQuadMaterial.uniforms.blitTexture.value = texture\n fullscreenQuadMaterial.defines.IS_SRGB =\n 'colorSpace' in texture ? 
texture.colorSpace === 'srgb' : texture.encoding === 3001\n fullscreenQuadMaterial.needsUpdate = true\n\n if (!fullscreenQuad) {\n fullscreenQuad = new Mesh(fullscreenQuadGeometry, fullscreenQuadMaterial)\n fullscreenQuad.frustrumCulled = false\n }\n\n const _camera = new PerspectiveCamera()\n const _scene = new Scene()\n _scene.add(fullscreenQuad)\n\n if (!renderer) {\n renderer = _renderer = new WebGLRenderer({ antialias: false })\n }\n\n renderer.setSize(Math.min(texture.image.width, maxTextureSize), Math.min(texture.image.height, maxTextureSize))\n renderer.clear()\n renderer.render(_scene, _camera)\n\n const readableTexture = new Texture(renderer.domElement)\n\n readableTexture.minFilter = texture.minFilter\n readableTexture.magFilter = texture.magFilter\n readableTexture.wrapS = texture.wrapS\n readableTexture.wrapT = texture.wrapT\n readableTexture.name = texture.name\n\n if (_renderer) {\n _renderer.dispose()\n _renderer = null\n }\n\n return readableTexture\n}\n\n/**\n * The KHR_mesh_quantization extension allows these extra attribute component types\n *\n * @see https://github.com/KhronosGroup/glTF/blob/main/extensions/2.0/Khronos/KHR_mesh_quantization/README.md#extending-mesh-attributes\n */\nconst KHR_mesh_quantization_ExtraAttrTypes = {\n POSITION: [\n 'byte',\n 'byte normalized',\n 'unsigned byte',\n 'unsigned byte normalized',\n 'short',\n 'short normalized',\n 'unsigned short',\n 'unsigned short normalized',\n ],\n NORMAL: ['byte normalized', 'short normalized'],\n TANGENT: ['byte normalized', 'short normalized'],\n TEXCOORD: ['byte', 'byte normalized', 'unsigned byte', 'short', 'short normalized', 'unsigned short'],\n}\n\nconst GLTFExporter = /* @__PURE__ */ (() => {\n class GLTFExporter {\n /**\n * Static utility functions\n */\n static Utils = {\n insertKeyframe: function (track, time) {\n const tolerance = 0.001 // 1ms\n const valueSize = track.getValueSize()\n\n const times = new track.TimeBufferType(track.times.length + 1)\n const values = new track.ValueBufferType(track.values.length + valueSize)\n const interpolant = track.createInterpolant(new track.ValueBufferType(valueSize))\n\n let index\n\n if (track.times.length === 0) {\n times[0] = time\n\n for (let i = 0; i < valueSize; i++) {\n values[i] = 0\n }\n\n index = 0\n } else if (time < track.times[0]) {\n if (Math.abs(track.times[0] - time) < tolerance) return 0\n\n times[0] = time\n times.set(track.times, 1)\n\n values.set(interpolant.evaluate(time), 0)\n values.set(track.values, valueSize)\n\n index = 0\n } else if (time > track.times[track.times.length - 1]) {\n if (Math.abs(track.times[track.times.length - 1] - time) < tolerance) {\n return track.times.length - 1\n }\n\n times[times.length - 1] = time\n times.set(track.times, 0)\n\n values.set(track.values, 0)\n values.set(interpolant.evaluate(time), track.values.length)\n\n index = times.length - 1\n } else {\n for (let i = 0; i < track.times.length; i++) {\n if (Math.abs(track.times[i] - time) < tolerance) return i\n\n if (track.times[i] < time && track.times[i + 1] > time) {\n times.set(track.times.slice(0, i + 1), 0)\n times[i + 1] = time\n times.set(track.times.slice(i + 1), i + 2)\n\n values.set(track.values.slice(0, (i + 1) * valueSize), 0)\n values.set(interpolant.evaluate(time), (i + 1) * valueSize)\n values.set(track.values.slice((i + 1) * valueSize), (i + 2) * valueSize)\n\n index = i + 1\n\n break\n }\n }\n }\n\n track.times = times\n track.values = values\n\n return index\n },\n\n mergeMorphTargetTracks: function (clip, root) {\n const tracks = 
[]\n const mergedTracks = {}\n const sourceTracks = clip.tracks\n\n for (let i = 0; i < sourceTracks.length; ++i) {\n let sourceTrack = sourceTracks[i]\n const sourceTrackBinding = PropertyBinding.parseTrackName(sourceTrack.name)\n const sourceTrackNode = PropertyBinding.findNode(root, sourceTrackBinding.nodeName)\n\n if (\n sourceTrackBinding.propertyName !== 'morphTargetInfluences' ||\n sourceTrackBinding.propertyIndex === undefined\n ) {\n // Tracks that don't affect morph targets, or that affect all morph targets together, can be left as-is.\n tracks.push(sourceTrack)\n continue\n }\n\n if (\n sourceTrack.createInterpolant !== sourceTrack.InterpolantFactoryMethodDiscrete &&\n sourceTrack.createInterpolant !== sourceTrack.InterpolantFactoryMethodLinear\n ) {\n if (sourceTrack.createInterpolant.isInterpolantFactoryMethodGLTFCubicSpline) {\n // This should never happen, because glTF morph target animations\n // affect all targets already.\n throw new Error('THREE.GLTFExporter: Cannot merge tracks with glTF CUBICSPLINE interpolation.')\n }\n\n console.warn('THREE.GLTFExporter: Morph target interpolation mode not yet supported. Using LINEAR instead.')\n\n sourceTrack = sourceTrack.clone()\n sourceTrack.setInterpolation(InterpolateLinear)\n }\n\n const targetCount = sourceTrackNode.morphTargetInfluences.length\n const targetIndex = sourceTrackNode.morphTargetDictionary[sourceTrackBinding.propertyIndex]\n\n if (targetIndex === undefined) {\n throw new Error('THREE.GLTFExporter: Morph target name not found: ' + sourceTrackBinding.propertyIndex)\n }\n\n let mergedTrack\n\n // If this is the first time we've seen this object, create a new\n // track to store merged keyframe data for each morph target.\n if (mergedTracks[sourceTrackNode.uuid] === undefined) {\n mergedTrack = sourceTrack.clone()\n\n const values = new mergedTrack.ValueBufferType(targetCount * mergedTrack.times.length)\n\n for (let j = 0; j < mergedTrack.times.length; j++) {\n values[j * targetCount + targetIndex] = mergedTrack.values[j]\n }\n\n // We need to take into consideration the intended target node\n // of our original un-merged morphTarget animation.\n mergedTrack.name = (sourceTrackBinding.nodeName || '') + '.morphTargetInfluences'\n mergedTrack.values = values\n\n mergedTracks[sourceTrackNode.uuid] = mergedTrack\n tracks.push(mergedTrack)\n\n continue\n }\n\n const sourceInterpolant = sourceTrack.createInterpolant(new sourceTrack.ValueBufferType(1))\n\n mergedTrack = mergedTracks[sourceTrackNode.uuid]\n\n // For every existing keyframe of the merged track, write a (possibly\n // interpolated) value from the source track.\n for (let j = 0; j < mergedTrack.times.length; j++) {\n mergedTrack.values[j * targetCount + targetIndex] = sourceInterpolant.evaluate(mergedTrack.times[j])\n }\n\n // For every existing keyframe of the source track, write a (possibly\n // new) keyframe to the merged track. 
Values from the previous loop may\n // be written again, but keyframes are de-duplicated.\n for (let j = 0; j < sourceTrack.times.length; j++) {\n const keyframeIndex = this.insertKeyframe(mergedTrack, sourceTrack.times[j])\n mergedTrack.values[keyframeIndex * targetCount + targetIndex] = sourceTrack.values[j]\n }\n }\n\n clip.tracks = tracks\n\n return clip\n },\n }\n\n constructor() {\n this.pluginCallbacks = []\n\n this.register(function (writer) {\n return new GLTFLightExtension(writer)\n })\n\n this.register(function (writer) {\n return new GLTFMaterialsUnlitExtension(writer)\n })\n\n this.register(function (writer) {\n return new GLTFMaterialsTransmissionExtension(writer)\n })\n\n this.register(function (writer) {\n return new GLTFMaterialsVolumeExtension(writer)\n })\n\n this.register(function (writer) {\n return new GLTFMaterialsIorExtension(writer)\n })\n\n this.register(function (writer) {\n return new GLTFMaterialsSpecularExtension(writer)\n })\n\n this.register(function (writer) {\n return new GLTFMaterialsClearcoatExtension(writer)\n })\n\n this.register(function (writer) {\n return new GLTFMaterialsIridescenceExtension(writer)\n })\n\n this.register(function (writer) {\n return new GLTFMaterialsSheenExtension(writer)\n })\n\n this.register(function (writer) {\n return new GLTFMaterialsAnisotropyExtension(writer)\n })\n\n this.register(function (writer) {\n return new GLTFMaterialsEmissiveStrengthExtension(writer)\n })\n }\n\n register(callback) {\n if (this.pluginCallbacks.indexOf(callback) === -1) {\n this.pluginCallbacks.push(callback)\n }\n\n return this\n }\n\n unregister(callback) {\n if (this.pluginCallbacks.indexOf(callback) !== -1) {\n this.pluginCallbacks.splice(this.pluginCallbacks.indexOf(callback), 1)\n }\n\n return this\n }\n\n /**\n * Parse scenes and generate GLTF output\n * @param {Scene or [THREE.Scenes]} input Scene or Array of THREE.Scenes\n * @param {Function} onDone Callback on completed\n * @param {Function} onError Callback on errors\n * @param {Object} options options\n */\n parse(input, onDone, onError, options) {\n const writer = new GLTFWriter()\n const plugins = []\n\n for (let i = 0, il = this.pluginCallbacks.length; i < il; i++) {\n plugins.push(this.pluginCallbacks[i](writer))\n }\n\n writer.setPlugins(plugins)\n writer.write(input, onDone, options).catch(onError)\n }\n\n parseAsync(input, options) {\n const scope = this\n\n return new Promise(function (resolve, reject) {\n scope.parse(input, resolve, reject, options)\n })\n }\n }\n\n return GLTFExporter\n})()\n\n//------------------------------------------------------------------------------\n// Constants\n//------------------------------------------------------------------------------\n\nconst WEBGL_CONSTANTS = {\n POINTS: 0x0000,\n LINES: 0x0001,\n LINE_LOOP: 0x0002,\n LINE_STRIP: 0x0003,\n TRIANGLES: 0x0004,\n TRIANGLE_STRIP: 0x0005,\n TRIANGLE_FAN: 0x0006,\n\n BYTE: 0x1400,\n UNSIGNED_BYTE: 0x1401,\n SHORT: 0x1402,\n UNSIGNED_SHORT: 0x1403,\n INT: 0x1404,\n UNSIGNED_INT: 0x1405,\n FLOAT: 0x1406,\n\n ARRAY_BUFFER: 0x8892,\n ELEMENT_ARRAY_BUFFER: 0x8893,\n\n NEAREST: 0x2600,\n LINEAR: 0x2601,\n NEAREST_MIPMAP_NEAREST: 0x2700,\n LINEAR_MIPMAP_NEAREST: 0x2701,\n NEAREST_MIPMAP_LINEAR: 0x2702,\n LINEAR_MIPMAP_LINEAR: 0x2703,\n\n CLAMP_TO_EDGE: 33071,\n MIRRORED_REPEAT: 33648,\n REPEAT: 10497,\n}\n\nconst KHR_MESH_QUANTIZATION = 'KHR_mesh_quantization'\n\nconst THREE_TO_WEBGL = {}\n\nTHREE_TO_WEBGL[NearestFilter] = WEBGL_CONSTANTS.NEAREST\nTHREE_TO_WEBGL[NearestMipmapNearestFilter] = 
WEBGL_CONSTANTS.NEAREST_MIPMAP_NEAREST\nTHREE_TO_WEBGL[NearestMipmapLinearFilter] = WEBGL_CONSTANTS.NEAREST_MIPMAP_LINEAR\nTHREE_TO_WEBGL[LinearFilter] = WEBGL_CONSTANTS.LINEAR\nTHREE_TO_WEBGL[LinearMipmapNearestFilter] = WEBGL_CONSTANTS.LINEAR_MIPMAP_NEAREST\nTHREE_TO_WEBGL[LinearMipmapLinearFilter] = WEBGL_CONSTANTS.LINEAR_MIPMAP_LINEAR\n\nTHREE_TO_WEBGL[ClampToEdgeWrapping] = WEBGL_CONSTANTS.CLAMP_TO_EDGE\nTHREE_TO_WEBGL[RepeatWrapping] = WEBGL_CONSTANTS.REPEAT\nTHREE_TO_WEBGL[MirroredRepeatWrapping] = WEBGL_CONSTANTS.MIRRORED_REPEAT\n\nconst PATH_PROPERTIES = {\n scale: 'scale',\n position: 'translation',\n quaternion: 'rotation',\n morphTargetInfluences: 'weights',\n}\n\nconst DEFAULT_SPECULAR_COLOR = /* @__PURE__ */ new Color()\n\n// GLB constants\n// https://github.com/KhronosGroup/glTF/blob/master/specification/2.0/README.md#glb-file-format-specification\n\nconst GLB_HEADER_BYTES = 12\nconst GLB_HEADER_MAGIC = 0x46546c67\nconst GLB_VERSION = 2\n\nconst GLB_CHUNK_PREFIX_BYTES = 8\nconst GLB_CHUNK_TYPE_JSON = 0x4e4f534a\nconst GLB_CHUNK_TYPE_BIN = 0x004e4942\n\n//------------------------------------------------------------------------------\n// Utility functions\n//------------------------------------------------------------------------------\n\n/**\n * Compare two arrays\n * @param {Array} array1 Array 1 to compare\n * @param {Array} array2 Array 2 to compare\n * @return {Boolean} Returns true if both arrays are equal\n */\nfunction equalArray(array1, array2) {\n return (\n array1.length === array2.length &&\n array1.every(function (element, index) {\n return element === array2[index]\n })\n )\n}\n\n/**\n * Converts a string to an ArrayBuffer.\n * @param {string} text\n * @return {ArrayBuffer}\n */\nfunction stringToArrayBuffer(text) {\n return new TextEncoder().encode(text).buffer\n}\n\n/**\n * Is identity matrix\n *\n * @param {Matrix4} matrix\n * @returns {Boolean} Returns true, if parameter is identity matrix\n */\nfunction isIdentityMatrix(matrix) {\n return equalArray(matrix.elements, [1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1])\n}\n\n/**\n * Get the min and max vectors from the given attribute\n * @param {BufferAttribute} attribute Attribute to find the min/max in range from start to start + count\n * @param {Integer} start\n * @param {Integer} count\n * @return {Object} Object containing the `min` and `max` values (As an array of attribute.itemSize components)\n */\nfunction getMinMax(attribute, start, count) {\n const output = {\n min: new Array(attribute.itemSize).fill(Number.POSITIVE_INFINITY),\n max: new Array(attribute.itemSize).fill(Number.NEGATIVE_INFINITY),\n }\n\n for (let i = start; i < start + count; i++) {\n for (let a = 0; a < attribute.itemSize; a++) {\n let value\n\n if (attribute.itemSize > 4) {\n // no support for interleaved data for itemSize > 4\n\n value = attribute.array[i * attribute.itemSize + a]\n } else {\n if (a === 0) value = attribute.getX(i)\n else if (a === 1) value = attribute.getY(i)\n else if (a === 2) value = attribute.getZ(i)\n else if (a === 3) value = attribute.getW(i)\n\n if (attribute.normalized === true) {\n value = MathUtils.normalize(value, attribute.array)\n }\n }\n\n output.min[a] = Math.min(output.min[a], value)\n output.max[a] = Math.max(output.max[a], value)\n }\n }\n\n return output\n}\n\n/**\n * Get the required size + padding for a buffer, rounded to the next 4-byte boundary.\n * https://github.com/KhronosGroup/glTF/tree/master/specification/2.0#data-alignment\n *\n * @param {Integer} bufferSize The size the original 
buffer.\n * @returns {Integer} new buffer size with required padding.\n *\n */\nfunction getPaddedBufferSize(bufferSize) {\n return Math.ceil(bufferSize / 4) * 4\n}\n\n/**\n * Returns a buffer aligned to 4-byte boundary.\n *\n * @param {ArrayBuffer} arrayBuffer Buffer to pad\n * @param {Integer} paddingByte (Optional)\n * @returns {ArrayBuffer} The same buffer if it's already aligned to 4-byte boundary or a new buffer\n */\nfunction getPaddedArrayBuffer(arrayBuffer, paddingByte = 0) {\n const paddedLength = getPaddedBufferSize(arrayBuffer.byteLength)\n\n if (paddedLength !== arrayBuffer.byteLength) {\n const array = new Uint8Array(paddedLength)\n array.set(new Uint8Array(arrayBuffer))\n\n if (paddingByte !== 0) {\n for (let i = arrayBuffer.byteLength; i < paddedLength; i++) {\n array[i] = paddingByte\n }\n }\n\n return array.buffer\n }\n\n return arrayBuffer\n}\n\nfunction getCanvas() {\n if (typeof document === 'undefined' && typeof OffscreenCanvas !== 'undefined') {\n return new OffscreenCanvas(1, 1)\n }\n\n return document.createElement('canvas')\n}\n\nfunction getToBlobPromise(canvas, mimeType) {\n if (canvas.toBlob !== undefined) {\n return new Promise((resolve) => canvas.toBlob(resolve, mimeType))\n }\n\n let quality\n\n // Blink's implementation of convertToBlob seems to default to a quality level of 100%\n // Use the Blink default quality levels of toBlob instead so that file sizes are comparable.\n if (mimeType === 'image/jpeg') {\n quality = 0.92\n } else if (mimeType === 'image/webp') {\n quality = 0.8\n }\n\n return canvas.convertToBlob({\n type: mimeType,\n quality: quality,\n })\n}\n\n/**\n * Writer\n */\nclass GLTFWriter {\n constructor() {\n this.plugins = []\n\n this.options = {}\n this.pending = []\n this.buffers = []\n\n this.byteOffset = 0\n this.buffers = []\n this.nodeMap = new Map()\n this.skins = []\n\n this.extensionsUsed = {}\n this.extensionsRequired = {}\n\n this.uids = new Map()\n this.uid = 0\n\n this.json = {\n asset: {\n version: '2.0',\n generator: 'THREE.GLTFExporter',\n },\n }\n\n this.cache = {\n meshes: new Map(),\n attributes: new Map(),\n attributesNormalized: new Map(),\n materials: new Map(),\n textures: new Map(),\n images: new Map(),\n }\n }\n\n setPlugins(plugins) {\n this.plugins = plugins\n }\n\n /**\n * Parse scenes and generate GLTF output\n * @param {Scene or [THREE.Scenes]} input Scene or Array of THREE.Scenes\n * @param {Function} onDone Callback on completed\n * @param {Object} options options\n */\n async write(input, onDone, options = {}) {\n this.options = Object.assign(\n {\n // default options\n binary: false,\n trs: false,\n onlyVisible: true,\n maxTextureSize: Infinity,\n animations: [],\n includeCustomExtensions: false,\n },\n options,\n )\n\n if (this.options.animations.length > 0) {\n // Only TRS properties, and not matrices, may be targeted by animation.\n this.options.trs = true\n }\n\n this.processInput(input)\n\n await Promise.all(this.pending)\n\n const writer = this\n const buffers = writer.buffers\n const json = writer.json\n options = writer.options\n\n const extensionsUsed = writer.extensionsUsed\n const extensionsRequired = writer.extensionsRequired\n\n // Merge buffers.\n const blob = new Blob(buffers, { type: 'application/octet-stream' })\n\n // Declare extensions.\n const extensionsUsedList = Object.keys(extensionsUsed)\n const extensionsRequiredList = Object.keys(extensionsRequired)\n\n if (extensionsUsedList.length > 0) json.extensionsUsed = extensionsUsedList\n if (extensionsRequiredList.length > 0) 
json.extensionsRequired = extensionsRequiredList\n\n // Update bytelength of the single buffer.\n if (json.buffers && json.buffers.length > 0) json.buffers[0].byteLength = blob.size\n\n if (options.binary === true) {\n // https://github.com/KhronosGroup/glTF/blob/master/specification/2.0/README.md#glb-file-format-specification\n\n blob.arrayBuffer().then((result) => {\n // Binary chunk.\n const binaryChunk = getPaddedArrayBuffer(result)\n const binaryChunkPrefix = new DataView(new ArrayBuffer(GLB_CHUNK_PREFIX_BYTES))\n binaryChunkPrefix.setUint32(0, binaryChunk.byteLength, true)\n binaryChunkPrefix.setUint32(4, GLB_CHUNK_TYPE_BIN, true)\n\n // JSON chunk.\n const jsonChunk = getPaddedArrayBuffer(stringToArrayBuffer(JSON.stringify(json)), 0x20)\n const jsonChunkPrefix = new DataView(new ArrayBuffer(GLB_CHUNK_PREFIX_BYTES))\n jsonChunkPrefix.setUint32(0, jsonChunk.byteLength, true)\n jsonChunkPrefix.setUint32(4, GLB_CHUNK_TYPE_JSON, true)\n\n // GLB header.\n const header = new ArrayBuffer(GLB_HEADER_BYTES)\n const headerView = new DataView(header)\n headerView.setUint32(0, GLB_HEADER_MAGIC, true)\n headerView.setUint32(4, GLB_VERSION, true)\n const totalByteLength =\n GLB_HEADER_BYTES +\n jsonChunkPrefix.byteLength +\n jsonChunk.byteLength +\n binaryChunkPrefix.byteLength +\n binaryChunk.byteLength\n headerView.setUint32(8, totalByteLength, true)\n\n const glbBlob = new Blob([header, jsonChunkPrefix, jsonChunk, binaryChunkPrefix, binaryChunk], {\n type: 'application/octet-stream',\n })\n\n glbBlob.arrayBuffer().then(onDone)\n })\n } else {\n if (json.buffers && json.buffers.length > 0) {\n readAsDataURL(blob).then((uri) => {\n json.buffers[0].uri = uri\n onDone(json)\n })\n } else {\n onDone(json)\n }\n }\n }\n\n /**\n * Serializes a userData.\n *\n * @param {THREE.Object3D|THREE.Material} object\n * @param {Object} objectDef\n */\n serializeUserData(object, objectDef) {\n if (Object.keys(object.userData).length === 0) return\n\n const options = this.options\n const extensionsUsed = this.extensionsUsed\n\n try {\n const json = JSON.parse(JSON.stringify(object.userData))\n\n if (options.includeCustomExtensions && json.gltfExtensions) {\n if (objectDef.extensions === undefined) objectDef.extensions = {}\n\n for (const extensionName in json.gltfExtensions) {\n objectDef.extensions[extensionName] = json.gltfExtensions[extensionName]\n extensionsUsed[extensionName] = true\n }\n\n delete json.gltfExtensions\n }\n\n if (Object.keys(json).length > 0) objectDef.extras = json\n } catch (error) {\n console.warn(\n \"THREE.GLTFExporter: userData of '\" +\n object.name +\n \"' \" +\n \"won't be serialized because of JSON.stringify error - \" +\n error.message,\n )\n }\n }\n\n /**\n * Returns ids for buffer attributes.\n * @param {Object} object\n * @return {Integer}\n */\n getUID(attribute, isRelativeCopy = false) {\n if (this.uids.has(attribute) === false) {\n const uids = new Map()\n\n uids.set(true, this.uid++)\n uids.set(false, this.uid++)\n\n this.uids.set(attribute, uids)\n }\n\n const uids = this.uids.get(attribute)\n\n return uids.get(isRelativeCopy)\n }\n\n /**\n * Checks if normal attribute values are normalized.\n *\n * @param {BufferAttribute} normal\n * @returns {Boolean}\n */\n isNormalizedNormalAttribute(normal) {\n const cache = this.cache\n\n if (cache.attributesNormalized.has(normal)) return false\n\n const v = new Vector3()\n\n for (let i = 0, il = normal.count; i < il; i++) {\n // 0.0005 is from glTF-validator\n if (Math.abs(v.fromBufferAttribute(normal, i).length() - 1.0) > 0.0005) 
return false\n }\n\n return true\n }\n\n /**\n * Creates normalized normal buffer attribute.\n *\n * @param {BufferAttribute} normal\n * @returns {BufferAttribute}\n *\n */\n createNormalizedNormalAttribute(normal) {\n const cache = this.cache\n\n if (cache.attributesNormalized.has(normal)) return cache.attributesNormalized.get(normal)\n\n const attribute = normal.clone()\n const v = new Vector3()\n\n for (let i = 0, il = attribute.count; i < il; i++) {\n v.fromBufferAttribute(attribute, i)\n\n if (v.x === 0 && v.y === 0 && v.z === 0) {\n // if values can't be normalized set (1, 0, 0)\n v.setX(1.0)\n } else {\n v.normalize()\n }\n\n attribute.setXYZ(i, v.x, v.y, v.z)\n }\n\n cache.attributesNormalized.set(normal, attribute)\n\n return attribute\n }\n\n /**\n * Applies a texture transform, if present, to the map definition. Requires\n * the KHR_texture_transform extension.\n *\n * @param {Object} mapDef\n * @param {THREE.Texture} texture\n */\n applyTextureTransform(mapDef, texture) {\n let didTransform = false\n const transformDef = {}\n\n if (texture.offset.x !== 0 || texture.offset.y !== 0) {\n transformDef.offset = texture.offset.toArray()\n didTransform = true\n }\n\n if (texture.rotation !== 0) {\n transformDef.rotation = texture.rotation\n didTransform = true\n }\n\n if (texture.repeat.x !== 1 || texture.repeat.y !== 1) {\n transformDef.scale = texture.repeat.toArray()\n didTransform = true\n }\n\n if (didTransform) {\n mapDef.extensions = mapDef.extensions || {}\n mapDef.extensions['KHR_texture_transform'] = transformDef\n this.extensionsUsed['KHR_texture_transform'] = true\n }\n }\n\n buildMetalRoughTexture(metalnessMap, roughnessMap) {\n if (metalnessMap === roughnessMap) return metalnessMap\n\n function getEncodingConversion(map) {\n if ('colorSpace' in map ? map.colorSpace === 'srgb' : map.encoding === 3001) {\n return function SRGBToLinear(c) {\n return c < 0.04045 ? c * 0.0773993808 : Math.pow(c * 0.9478672986 + 0.0521327014, 2.4)\n }\n }\n\n return function LinearToLinear(c) {\n return c\n }\n }\n\n console.warn('THREE.GLTFExporter: Merged metalnessMap and roughnessMap textures.')\n\n if (metalnessMap instanceof CompressedTexture) {\n metalnessMap = decompress(metalnessMap)\n }\n\n if (roughnessMap instanceof CompressedTexture) {\n roughnessMap = decompress(roughnessMap)\n }\n\n const metalness = metalnessMap ? metalnessMap.image : null\n const roughness = roughnessMap ? roughnessMap.image : null\n\n const width = Math.max(metalness ? metalness.width : 0, roughness ? roughness.width : 0)\n const height = Math.max(metalness ? metalness.height : 0, roughness ? 
roughness.height : 0)\n\n const canvas = getCanvas()\n canvas.width = width\n canvas.height = height\n\n const context = canvas.getContext('2d')\n context.fillStyle = '#00ffff'\n context.fillRect(0, 0, width, height)\n\n const composite = context.getImageData(0, 0, width, height)\n\n if (metalness) {\n context.drawImage(metalness, 0, 0, width, height)\n\n const convert = getEncodingConversion(metalnessMap)\n const data = context.getImageData(0, 0, width, height).data\n\n for (let i = 2; i < data.length; i += 4) {\n composite.data[i] = convert(data[i] / 256) * 256\n }\n }\n\n if (roughness) {\n context.drawImage(roughness, 0, 0, width, height)\n\n const convert = getEncodingConversion(roughnessMap)\n const data = context.getImageData(0, 0, width, height).data\n\n for (let i = 1; i < data.length; i += 4) {\n composite.data[i] = convert(data[i] / 256) * 256\n }\n }\n\n context.putImageData(composite, 0, 0)\n\n //\n\n const reference = metalnessMap || roughnessMap\n\n const texture = reference.clone()\n\n // TODO Use new Source() instead?\n texture.source = new Texture(canvas).source\n if ('colorSpace' in texture) texture.colorSpace = ''\n else texture.encoding = 3000\n texture.channel = (metalnessMap || roughnessMap).channel\n\n if (metalnessMap && roughnessMap && metalnessMap.channel !== roughnessMap.channel) {\n console.warn('THREE.GLTFExporter: UV channels for metalnessMap and roughnessMap textures must match.')\n }\n\n return texture\n }\n\n /**\n * Process a buffer to append to the default one.\n * @param {ArrayBuffer} buffer\n * @return {Integer}\n */\n processBuffer(buffer) {\n const json = this.json\n const buffers = this.buffers\n\n if (!json.buffers) json.buffers = [{ byteLength: 0 }]\n\n // All buffers are merged before export.\n buffers.push(buffer)\n\n return 0\n }\n\n /**\n * Process and generate a BufferView\n * @param {BufferAttribute} attribute\n * @param {number} componentType\n * @param {number} start\n * @param {number} count\n * @param {number} target (Optional) Target usage of the BufferView\n * @return {Object}\n */\n processBufferView(attribute, componentType, start, count, target) {\n const json = this.json\n\n if (!json.bufferViews) json.bufferViews = []\n\n // Create a new dataview and dump the attribute's array into it\n\n let componentSize\n\n switch (componentType) {\n case WEBGL_CONSTANTS.BYTE:\n case WEBGL_CONSTANTS.UNSIGNED_BYTE:\n componentSize = 1\n\n break\n\n case WEBGL_CONSTANTS.SHORT:\n case WEBGL_CONSTANTS.UNSIGNED_SHORT:\n componentSize = 2\n\n break\n\n default:\n componentSize = 4\n }\n\n let byteStride = attribute.itemSize * componentSize\n if (target === WEBGL_CONSTANTS.ARRAY_BUFFER) {\n // Each element of a vertex attribute MUST be aligned to 4-byte boundaries\n // inside a bufferView\n byteStride = Math.ceil(byteStride / 4) * 4\n }\n const byteLength = getPaddedBufferSize(count * byteStride)\n const dataView = new DataView(new ArrayBuffer(byteLength))\n let offset = 0\n\n for (let i = start; i < start + count; i++) {\n for (let a = 0; a < attribute.itemSize; a++) {\n let value\n\n if (attribute.itemSize > 4) {\n // no support for interleaved data for itemSize > 4\n\n value = attribute.array[i * attribute.itemSize + a]\n } else {\n if (a === 0) value = attribute.getX(i)\n else if (a === 1) value = attribute.getY(i)\n else if (a === 2) value = attribute.getZ(i)\n else if (a === 3) value = attribute.getW(i)\n\n if (attribute.normalized === true) {\n value = MathUtils.normalize(value, attribute.array)\n }\n }\n\n if (componentType === 
WEBGL_CONSTANTS.FLOAT) {\n dataView.setFloat32(offset, value, true)\n } else if (componentType === WEBGL_CONSTANTS.INT) {\n dataView.setInt32(offset, value, true)\n } else if (componentType === WEBGL_CONSTANTS.UNSIGNED_INT) {\n dataView.setUint32(offset, value, true)\n } else if (componentType === WEBGL_CONSTANTS.SHORT) {\n dataView.setInt16(offset, value, true)\n } else if (componentType === WEBGL_CONSTANTS.UNSIGNED_SHORT) {\n dataView.setUint16(offset, value, true)\n } else if (componentType === WEBGL_CONSTANTS.BYTE) {\n dataView.setInt8(offset, value)\n } else if (componentType === WEBGL_CONSTANTS.UNSIGNED_BYTE) {\n dataView.setUint8(offset, value)\n }\n\n offset += componentSize\n }\n if (offset % byteStride !== 0) {\n offset += byteStride - (offset % byteStride)\n }\n }\n\n const bufferViewDef = {\n buffer: this.processBuffer(dataView.buffer),\n byteOffset: this.byteOffset,\n byteLength: byteLength,\n }\n\n if (target !== undefined) bufferViewDef.target = target\n\n if (target === WEBGL_CONSTANTS.ARRAY_BUFFER) {\n // Only define byteStride for vertex attributes.\n bufferViewDef.byteStride = byteStride\n }\n\n this.byteOffset += byteLength\n\n json.bufferViews.push(bufferViewDef)\n\n // @TODO Merge bufferViews where possible.\n const output = {\n id: json.bufferViews.length - 1,\n byteLength: 0,\n }\n\n return output\n }\n\n /**\n * Process and generate a BufferView from an image Blob.\n * @param {Blob} blob\n * @return {Promise<Integer>}\n */\n processBufferViewImage(blob) {\n const writer = this\n const json = writer.json\n\n if (!json.bufferViews) json.bufferViews = []\n\n return blob.arrayBuffer().then((result) => {\n const buffer = getPaddedArrayBuffer(result)\n\n const bufferViewDef = {\n buffer: writer.processBuffer(buffer),\n byteOffset: writer.byteOffset,\n byteLength: buffer.byteLength,\n }\n\n writer.byteOffset += buffer.byteLength\n return json.bufferViews.push(bufferViewDef) - 1\n })\n }\n\n /**\n * Process attribute to generate an accessor\n * @param {BufferAttribute} attribute Attribute to process\n * @param {THREE.BufferGeometry} geometry (Optional) Geometry used for truncated draw range\n * @param {Integer} start (Optional)\n * @param {Integer} count (Optional)\n * @return {Integer|null} Index of the processed accessor on the \"accessors\" array\n */\n processAccessor(attribute, geometry, start, count) {\n const json = this.json\n\n const types = {\n 1: 'SCALAR',\n 2: 'VEC2',\n 3: 'VEC3',\n 4: 'VEC4',\n 9: 'MAT3',\n 16: 'MAT4',\n }\n\n let componentType\n\n // Detect the component type of the attribute array\n if (attribute.array.constructor === Float32Array) {\n componentType = WEBGL_CONSTANTS.FLOAT\n } else if (attribute.array.constructor === Int32Array) {\n componentType = WEBGL_CONSTANTS.INT\n } else if (attribute.array.constructor === Uint32Array) {\n componentType = WEBGL_CONSTANTS.UNSIGNED_INT\n } else if (attribute.array.constructor === Int16Array) {\n componentType = WEBGL_CONSTANTS.SHORT\n } else if (attribute.array.constructor === Uint16Array) {\n componentType = WEBGL_CONSTANTS.UNSIGNED_SHORT\n } else if (attribute.array.constructor === Int8Array) {\n componentType = WEBGL_CONSTANTS.BYTE\n } else if (attribute.array.constructor === Uint8Array) {\n componentType = WEBGL_CONSTANTS.UNSIGNED_BYTE\n } else {\n throw new Error(\n 'THREE.GLTFExporter: Unsupported bufferAttribute component type: ' + attribute.array.constructor.name,\n )\n }\n\n if (start === undefined) start = 0\n if (count === undefined) count = attribute.count\n\n // Skip creating an accessor if 
the attribute doesn't have data to export\n if (count === 0) return null\n\n const minMax = getMinMax(attribute, start, count)\n let bufferViewTarget\n\n // If geometry isn't provided, don't infer the target usage of the bufferView. For\n // animation samplers, target must not be set.\n if (geometry !== undefined) {\n bufferViewTarget =\n attribute === geometry.index ? WEBGL_CONSTANTS.ELEMENT_ARRAY_BUFFER : WEBGL_CONSTANTS.ARRAY_BUFFER\n }\n\n const bufferView = this.processBufferView(attribute, componentType, start, count, bufferViewTarget)\n\n const accessorDef = {\n bufferView: bufferView.id,\n byteOffset: bufferView.byteOffset,\n componentType: componentType,\n count: count,\n max: minMax.max,\n min: minMax.min,\n type: types[attribute.itemSize],\n }\n\n if (attribute.normalized === true) accessorDef.normalized = true\n if (!json.accessors) json.accessors = []\n\n return json.accessors.push(accessorDef) - 1\n }\n\n /**\n * Process image\n * @param {Image} image to process\n * @param {Integer} format of the image (RGBAFormat)\n * @param {Boolean} flipY before writing out the image\n * @param {String} mimeType export format\n * @return {Integer} Index of the processed texture in the \"images\" array\n */\n processImage(image, format, flipY, mimeType = 'image/png') {\n if (image !== null) {\n const writer = this\n const cache = writer.cache\n const json = writer.json\n const options = writer.options\n const pending = writer.pending\n\n if (!cache.images.has(image)) cache.images.set(image, {})\n\n const cachedImages = cache.images.get(image)\n\n const key = mimeType + ':flipY/' + flipY.toString()\n\n if (cachedImages[key] !== undefined) return cachedImages[key]\n\n if (!json.images) json.images = []\n\n const imageDef = { mimeType: mimeType }\n\n const canvas = getCanvas()\n\n canvas.width = Math.min(image.width, options.maxTextureSize)\n canvas.height = Math.min(image.height, options.maxTextureSize)\n\n const ctx = canvas.getContext('2d')\n\n if (flipY === true) {\n ctx.translate(0, canvas.height)\n ctx.scale(1, -1)\n }\n\n if (image.data !== undefined) {\n // THREE.DataTexture\n\n if (format !== RGBAFormat) {\n console.error('GLTFExporter: Only RGBAFormat is supported.', format)\n }\n\n if (image.width > options.maxTextureSize || image.height > options.maxTextureSize) {\n console.warn('GLTFExporter: Image size is bigger than maxTextureSize', image)\n }\n\n const data = new Uint8ClampedArray(image.height * image.width * 4)\n\n for (let i = 0; i < data.length; i += 4) {\n data[i + 0] = image.data[i + 0]\n data[i + 1] = image.data[i + 1]\n data[i + 2] = image.data[i + 2]\n data[i + 3] = image.data[i + 3]\n }\n\n ctx.putImageData(new ImageData(data, image.width, image.height), 0, 0)\n } else {\n ctx.drawImage(image, 0, 0, canvas.width, canvas.height)\n }\n\n if (options.binary === true) {\n pending.push(\n getToBlobPromise(canvas, mimeType)\n .then((blob) => writer.processBufferViewImage(blob))\n .then((bufferViewIndex) => {\n imageDef.bufferView = bufferViewIndex\n }),\n )\n } else {\n if (canvas.toDataURL !== undefined) {\n imageDef.uri = canvas.toDataURL(mimeType)\n } else {\n pending.push(\n getToBlobPromise(canvas, mimeType)\n .then(readAsDataURL)\n .then((uri) => {\n imageDef.uri = uri\n }),\n )\n }\n }\n\n const index = json.images.push(imageDef) - 1\n cachedImages[key] = index\n return index\n } else {\n throw new Error('THREE.GLTFExporter: No valid image data found. 
Unable to process texture.')\n }\n }\n\n /**\n * Process sampler\n * @param {Texture} map Texture to process\n * @return {Integer} Index of the processed texture in the \"samplers\" array\n */\n processSampler(map) {\n const json = this.json\n\n if (!json.samplers) json.samplers = []\n\n const samplerDef = {\n magFilter: THREE_TO_WEBGL[map.magFilter],\n minFilter: THREE_TO_WEBGL[map.minFilter],\n wrapS: THREE_TO_WEBGL[map.wrapS],\n wrapT: THREE_TO_WEBGL[map.wrapT],\n }\n\n return json.samplers.push(samplerDef) - 1\n }\n\n /**\n * Process texture\n * @param {Texture} map Map to process\n * @return {Integer} Index of the processed texture in the \"textures\" array\n */\n processTexture(map) {\n const writer = this\n const options = writer.options\n const cache = this.cache\n const json = this.json\n\n if (cache.textures.has(map)) return cache.textures.get(map)\n\n if (!json.textures) json.textures = []\n\n // make non-readable textures (e.g. CompressedTexture) readable by blitting them into a new texture\n if (map instanceof CompressedTexture) {\n map = decompress(map, options.maxTextureSize)\n }\n\n let mimeType = map.userData.mimeType\n\n if (mimeType === 'image/webp') mimeType = 'image/png'\n\n const textureDef = {\n sampler: this.processSampler(map),\n source: this.processImage(map.image, map.format, map.flipY, mimeType),\n }\n\n if (map.name) textureDef.name = map.name\n\n this._invokeAll(function (ext) {\n ext.writeTexture && ext.writeTexture(map, textureDef)\n })\n\n const index = json.textures.push(textureDef) - 1\n cache.textures.set(map, index)\n return index\n }\n\n /**\n * Process material\n * @param {THREE.Material} material Material to process\n * @return {Integer|null} Index of the processed material in the \"materials\" array\n */\n processMaterial(material) {\n const cache = this.cache\n const json = this.json\n\n if (cache.materials.has(material)) return cache.materials.get(material)\n\n if (material.isShaderMaterial) {\n console.warn('GLTFExporter: THREE.ShaderMaterial not supported.')\n return null\n }\n\n if (!json.materials) json.materials = []\n\n // @QUESTION Should we avoid including any attribute that has the default value?\n const materialDef = { pbrMetallicRoughness: {} }\n\n if (material.isMeshStandardMaterial !== true && material.isMeshBasicMaterial !== true) {\n console.warn('GLTFExporter: Use MeshStandardMaterial or MeshBasicMaterial for best results.')\n }\n\n // pbrMetallicRoughness.baseColorFactor\n const color = material.color.toArray().concat([material.opacity])\n\n if (!equalArray(color, [1, 1, 1, 1])) {\n materialDef.pbrMetallicRoughness.baseColorFactor = color\n }\n\n if (material.isMeshStandardMaterial) {\n materialDef.pbrMetallicRoughness.metallicFactor = material.metalness\n materialDef.pbrMetallicRoughness.roughnessFactor = material.roughness\n } else {\n materialDef.pbrMetallicRoughness.metallicFactor = 0.5\n materialDef.pbrMetallicRoughness.roughnessFactor = 0.5\n }\n\n // pbrMetallicRoughness.metallicRoughnessTexture\n if (material.metalnessMap || material.roughnessMap) {\n const metalRoughTexture = this.buildMetalRoughTexture(material.metalnessMap, material.roughnessMap)\n\n const metalRoughMapDef = {\n index: this.processTexture(metalRoughTexture),\n channel: metalRoughTexture.channel,\n }\n this.applyTextureTransform(metalRoughMapDef, metalRoughTexture)\n materialDef.pbrMetallicRoughness.metallicRoughnessTexture = metalRoughMapDef\n }\n\n // pbrMetallicRoughness.baseColorTexture\n if (material.map) {\n const baseColorMapDef = {\n index: 
this.processTexture(material.map),\n texCoord: material.map.channel,\n }\n this.applyTextureTransform(baseColorMapDef, material.map)\n materialDef.pbrMetallicRoughness.baseColorTexture = baseColorMapDef\n }\n\n if (material.emissive) {\n const emissive = material.emissive\n const maxEmissiveComponent = Math.max(emissive.r, emissive.g, emissive.b)\n\n if (maxEmissiveComponent > 0) {\n materialDef.emissiveFactor = material.emissive.toArray()\n }\n\n // emissiveTexture\n if (material.emissiveMap) {\n const emissiveMapDef = {\n index: this.processTexture(material.emissiveMap),\n texCoord: material.emissiveMap.channel,\n }\n this.applyTextureTransform(emissiveMapDef, material.emissiveMap)\n materialDef.emissiveTexture = emissiveMapDef\n }\n }\n\n // normalTexture\n if (material.normalMap) {\n const normalMapDef = {\n index: this.processTexture(material.normalMap),\n texCoord: material.normalMap.channel,\n }\n\n if (material.normalScale && material.normalScale.x !== 1) {\n // glTF normal scale is univariate. Ignore `y`, which may be flipped.\n // Context: https://github.com/mrdoob/three.js/issues/11438#issuecomment-507003995\n normalMapDef.scale = material.normalScale.x\n }\n\n this.applyTextureTransform(normalMapDef, material.normalMap)\n materialDef.normalTexture = normalMapDef\n }\n\n // occlusionTexture\n if (material.aoMap) {\n const occlusionMapDef = {\n index: this.processTexture(material.aoMap),\n texCoord: material.aoMap.channel,\n }\n\n if (material.aoMapIntensity !== 1.0) {\n occlusionMapDef.strength = material.aoMapIntensity\n }\n\n this.applyTextureTransform(occlusionMapDef, material.aoMap)\n materialDef.occlusionTexture = occlusionMapDef\n }\n\n // alphaMode\n if (material.transparent) {\n materialDef.alphaMode = 'BLEND'\n } else {\n if (material.alphaTest > 0.0) {\n materialDef.alphaMode = 'MASK'\n materialDef.alphaCutoff = material.alphaTest\n }\n }\n\n // doubleSided\n if (material.side === DoubleSide) materialDef.doubleSided = true\n if (material.name !== '') materialDef.name = material.name\n\n this.serializeUserData(material, materialDef)\n\n this._invokeAll(function (ext) {\n ext.writeMaterial && ext.writeMaterial(material, materialDef)\n })\n\n const index = json.materials.push(materialDef) - 1\n cache.materials.set(material, index)\n return index\n }\n\n /**\n * Process mesh\n * @param {THREE.Mesh} mesh Mesh to process\n * @return {Integer|null} Index of the processed mesh in the \"meshes\" array\n */\n processMesh(mesh) {\n const cache = this.cache\n const json = this.json\n\n const meshCacheKeyParts = [mesh.geometry.uuid]\n\n if (Array.isArray(mesh.material)) {\n for (let i = 0, l = mesh.material.length; i < l; i++) {\n meshCacheKeyParts.push(mesh.material[i].uuid)\n }\n } else {\n meshCacheKeyParts.push(mesh.material.uuid)\n }\n\n const meshCacheKey = meshCacheKeyParts.join(':')\n\n if (cache.meshes.has(meshCacheKey)) return cache.meshes.get(meshCacheKey)\n\n const geometry = mesh.geometry\n\n let mode\n\n // Use the correct mode\n if (mesh.isLineSegments) {\n mode = WEBGL_CONSTANTS.LINES\n } else if (mesh.isLineLoop) {\n mode = WEBGL_CONSTANTS.LINE_LOOP\n } else if (mesh.isLine) {\n mode = WEBGL_CONSTANTS.LINE_STRIP\n } else if (mesh.isPoints) {\n mode = WEBGL_CONSTANTS.POINTS\n } else {\n mode = mesh.material.wireframe ? 
WEBGL_CONSTANTS.LINES : WEBGL_CONSTANTS.TRIANGLES\n }\n\n const meshDef = {}\n const attributes = {}\n const primitives = []\n const targets = []\n\n // Conversion between attributes names in threejs and gltf spec\n const nameConversion = {\n ...(version >= 152\n ? {\n uv: 'TEXCOORD_0',\n uv1: 'TEXCOORD_1',\n uv2: 'TEXCOORD_2',\n uv3: 'TEXCOORD_3',\n }\n : {\n uv: 'TEXCOORD_0',\n uv2: 'TEXCOORD_1',\n }),\n color: 'COLOR_0',\n skinWeight: 'WEIGHTS_0',\n skinIndex: 'JOINTS_0',\n }\n\n const originalNormal = geometry.getAttribute('normal')\n\n if (originalNormal !== undefined && !this.isNormalizedNormalAttribute(originalNormal)) {\n console.warn('THREE.GLTFExporter: Creating normalized normal attribute from the non-normalized one.')\n\n geometry.setAttribute('normal', this.createNormalizedNormalAttribute(originalNormal))\n }\n\n // @QUESTION Detect if .vertexColors = true?\n // For every attribute create an accessor\n let modifiedAttribute = null\n\n for (let attributeName in geometry.attributes) {\n // Ignore morph target attributes, which are exported later.\n if (attributeName.slice(0, 5) === 'morph') continue\n\n const attribute = geometry.attributes[attributeName]\n attributeName = nameConversion[attributeName] || attributeName.toUpperCase()\n\n // Prefix all geometry attributes except the ones specifically\n // listed in the spec; non-spec attributes are considered custom.\n const validVertexAttributes = /^(POSITION|NORMAL|TANGENT|TEXCOORD_\\d+|COLOR_\\d+|JOINTS_\\d+|WEIGHTS_\\d+)$/\n\n if (!validVertexAttributes.test(attributeName)) attributeName = '_' + attributeName\n\n if (cache.attributes.has(this.getUID(attribute))) {\n attributes[attributeName] = cache.attributes.get(this.getUID(attribute))\n continue\n }\n\n // JOINTS_0 must be UNSIGNED_BYTE or UNSIGNED_SHORT.\n modifiedAttribute = null\n const array = attribute.array\n\n if (attributeName === 'JOINTS_0' && !(array instanceof Uint16Array) && !(array instanceof Uint8Array))
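
The source map above embeds the full GLTFExporter.js source (truncated here). The exporter API it defines is parse(input, onDone, onError, options) plus the Promise-based parseAsync(input, options), and the writer defaults visible in GLTFWriter.write() are binary, trs, onlyVisible, maxTextureSize, animations, and includeCustomExtensions. Below is a minimal usage sketch, assuming GLTFExporter is imported from three-stdlib and that a THREE.Scene named scene already exists; both the file name in the comment and the option values are illustrative, not part of the file above.

// usage-example.js — illustrative only, not part of GLTFExporter.cjs.map
import { GLTFExporter } from 'three-stdlib'

async function exportSceneToGLB(scene) {
  const exporter = new GLTFExporter()

  // Promise form: with binary: true the result is an ArrayBuffer (.glb);
  // with binary: false it is a plain glTF JSON object.
  const glb = await exporter.parseAsync(scene, {
    binary: true,
    onlyVisible: true,       // default: skip invisible objects
    maxTextureSize: 4096,    // downscale larger textures on export
    animations: [],          // AnimationClips to export; a non-empty list forces TRS node transforms
    includeCustomExtensions: false,
  })
  return glb
}

// Callback form, equivalent to the Promise form above.
function exportSceneToJSON(scene, onDone, onError) {
  new GLTFExporter().parse(scene, onDone, onError, { binary: false })
}

The KHR material extensions listed in the constructor (lights, unlit, transmission, volume, ior, specular, clearcoat, iridescence, sheen, anisotropy, emissive strength) are registered automatically; register(callback) and unregister(callback) can add or remove writer plugins before parse is called.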