playcanvas

PlayCanvas WebGL game engine
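The file below is the engine's GLB/glTF parser module. It is normally not imported directly; .glb data reaches it through the engine's container asset pipeline. As a minimal usage sketch, assuming the classic pc.Application bootstrap (newer engine versions may prefer AppBase), a page with a <canvas id="application"> element, and a placeholder model URL (none of which are part of this file), loading a GLB as a 'container' asset routes the binary through this parser and exposes the parsed meshes, materials, skins and animations on the resulting container resource:

// usage-sketch.js (illustrative only, not part of the module below)
import * as pc from 'playcanvas';

// assumed: a <canvas id="application"> element exists in the page
const canvas = document.getElementById('application');
const app = new pc.Application(canvas);
app.start();

// Loading a .glb as a 'container' asset runs the binary through the GLB parser below:
// buffer views, textures, materials, meshes, skins and animation tracks are parsed
// into engine resources attached to the container.
app.assets.loadFromUrl('assets/model.glb', 'container', (err, asset) => {
    if (err) {
        console.error(err);
        return;
    }
    // Build an entity hierarchy (with render components) from the parsed scene graph
    // and add it to the scene.
    const entity = asset.resource.instantiateRenderEntity();
    app.root.addChild(entity);
});

The parser source follows.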
import { Debug } from '../../core/debug.js'; import { path } from '../../core/path.js'; import { Color } from '../../core/math/color.js'; import { Mat4 } from '../../core/math/mat4.js'; import { math } from '../../core/math/math.js'; import { Vec2 } from '../../core/math/vec2.js'; import { Vec3 } from '../../core/math/vec3.js'; import { BoundingBox } from '../../core/shape/bounding-box.js'; import { CULLFACE_NONE, CULLFACE_BACK, FILTER_LINEAR_MIPMAP_LINEAR, FILTER_NEAREST_MIPMAP_LINEAR, FILTER_LINEAR_MIPMAP_NEAREST, FILTER_NEAREST_MIPMAP_NEAREST, FILTER_LINEAR, FILTER_NEAREST, ADDRESS_REPEAT, ADDRESS_MIRRORED_REPEAT, ADDRESS_CLAMP_TO_EDGE, INDEXFORMAT_UINT8, INDEXFORMAT_UINT16, BUFFER_STATIC, SEMANTIC_TEXCOORD7, SEMANTIC_TEXCOORD6, SEMANTIC_TEXCOORD5, SEMANTIC_TEXCOORD4, SEMANTIC_TEXCOORD3, SEMANTIC_TEXCOORD2, SEMANTIC_TEXCOORD1, SEMANTIC_TEXCOORD0, SEMANTIC_BLENDWEIGHT, SEMANTIC_BLENDINDICES, SEMANTIC_COLOR, SEMANTIC_TANGENT, SEMANTIC_NORMAL, SEMANTIC_POSITION, TYPE_UINT8, TYPE_UINT16, PRIMITIVE_TRIANGLES, PRIMITIVE_TRIFAN, PRIMITIVE_TRISTRIP, PRIMITIVE_LINESTRIP, PRIMITIVE_LINELOOP, PRIMITIVE_LINES, PRIMITIVE_POINTS, TYPE_FLOAT32, TYPE_UINT32, TYPE_INT32, TYPE_INT16, TYPE_INT8, typedArrayTypesByteSize, typedArrayTypes, INDEXFORMAT_UINT32 } from '../../platform/graphics/constants.js'; import { IndexBuffer } from '../../platform/graphics/index-buffer.js'; import { Texture } from '../../platform/graphics/texture.js'; import { VertexBuffer } from '../../platform/graphics/vertex-buffer.js'; import { VertexFormat } from '../../platform/graphics/vertex-format.js'; import { http } from '../../platform/net/http.js'; import { SPECOCC_AO, BLEND_NONE, BLEND_NORMAL, LIGHTFALLOFF_INVERSESQUARED, PROJECTION_ORTHOGRAPHIC, PROJECTION_PERSPECTIVE, ASPECT_AUTO, ASPECT_MANUAL } from '../../scene/constants.js'; import { GraphNode } from '../../scene/graph-node.js'; import { Light, lightTypes } from '../../scene/light.js'; import { Mesh } from '../../scene/mesh.js'; import { Morph } from '../../scene/morph.js'; import { MorphTarget } from '../../scene/morph-target.js'; import { calculateNormals } from '../../scene/geometry/geometry-utils.js'; import { Render } from '../../scene/render.js'; import { Skin } from '../../scene/skin.js'; import { StandardMaterial } from '../../scene/materials/standard-material.js'; import { Entity } from '../entity.js'; import { INTERPOLATION_CUBIC, INTERPOLATION_LINEAR, INTERPOLATION_STEP } from '../anim/constants.js'; import { AnimCurve } from '../anim/evaluator/anim-curve.js'; import { AnimData } from '../anim/evaluator/anim-data.js'; import { AnimTrack } from '../anim/evaluator/anim-track.js'; import { Asset } from '../asset/asset.js'; import { ABSOLUTE_URL } from '../asset/constants.js'; import { dracoDecode } from './draco-decoder.js'; import { Quat } from '../../core/math/quat.js'; // resources loaded from GLB file that the parser returns class GlbResources { destroy() { // render needs to dec ref meshes if (this.renders) { this.renders.forEach((render)=>{ render.meshes = null; }); } } } const isDataURI = (uri)=>{ return /^data:[^\n\r,\u2028\u2029]*,.*$/i.test(uri); }; const getDataURIMimeType = (uri)=>{ return uri.substring(uri.indexOf(':') + 1, uri.indexOf(';')); }; const getNumComponents = (accessorType)=>{ switch(accessorType){ case 'SCALAR': return 1; case 'VEC2': return 2; case 'VEC3': return 3; case 'VEC4': return 4; case 'MAT2': return 4; case 'MAT3': return 9; case 'MAT4': return 16; default: return 3; } }; const getComponentType = (componentType)=>{ 
switch(componentType){ case 5120: return TYPE_INT8; case 5121: return TYPE_UINT8; case 5122: return TYPE_INT16; case 5123: return TYPE_UINT16; case 5124: return TYPE_INT32; case 5125: return TYPE_UINT32; case 5126: return TYPE_FLOAT32; default: return 0; } }; const getComponentSizeInBytes = (componentType)=>{ switch(componentType){ case 5120: return 1; // int8 case 5121: return 1; // uint8 case 5122: return 2; // int16 case 5123: return 2; // uint16 case 5124: return 4; // int32 case 5125: return 4; // uint32 case 5126: return 4; // float32 default: return 0; } }; const getComponentDataType = (componentType)=>{ switch(componentType){ case 5120: return Int8Array; case 5121: return Uint8Array; case 5122: return Int16Array; case 5123: return Uint16Array; case 5124: return Int32Array; case 5125: return Uint32Array; case 5126: return Float32Array; default: return null; } }; const gltfToEngineSemanticMap = { 'POSITION': SEMANTIC_POSITION, 'NORMAL': SEMANTIC_NORMAL, 'TANGENT': SEMANTIC_TANGENT, 'COLOR_0': SEMANTIC_COLOR, 'JOINTS_0': SEMANTIC_BLENDINDICES, 'WEIGHTS_0': SEMANTIC_BLENDWEIGHT, 'TEXCOORD_0': SEMANTIC_TEXCOORD0, 'TEXCOORD_1': SEMANTIC_TEXCOORD1, 'TEXCOORD_2': SEMANTIC_TEXCOORD2, 'TEXCOORD_3': SEMANTIC_TEXCOORD3, 'TEXCOORD_4': SEMANTIC_TEXCOORD4, 'TEXCOORD_5': SEMANTIC_TEXCOORD5, 'TEXCOORD_6': SEMANTIC_TEXCOORD6, 'TEXCOORD_7': SEMANTIC_TEXCOORD7 }; // order vertexDesc to match the rest of the engine const attributeOrder = { [SEMANTIC_POSITION]: 0, [SEMANTIC_NORMAL]: 1, [SEMANTIC_TANGENT]: 2, [SEMANTIC_COLOR]: 3, [SEMANTIC_BLENDINDICES]: 4, [SEMANTIC_BLENDWEIGHT]: 5, [SEMANTIC_TEXCOORD0]: 6, [SEMANTIC_TEXCOORD1]: 7, [SEMANTIC_TEXCOORD2]: 8, [SEMANTIC_TEXCOORD3]: 9, [SEMANTIC_TEXCOORD4]: 10, [SEMANTIC_TEXCOORD5]: 11, [SEMANTIC_TEXCOORD6]: 12, [SEMANTIC_TEXCOORD7]: 13 }; // returns a function for dequantizing the data type const getDequantizeFunc = (srcType)=>{ // see https://github.com/KhronosGroup/glTF/tree/master/extensions/2.0/Khronos/KHR_mesh_quantization#encoding-quantized-data switch(srcType){ case TYPE_INT8: return (x)=>Math.max(x / 127.0, -1); case TYPE_UINT8: return (x)=>x / 255.0; case TYPE_INT16: return (x)=>Math.max(x / 32767.0, -1); case TYPE_UINT16: return (x)=>x / 65535.0; default: return (x)=>x; } }; // dequantize an array of data const dequantizeArray = (dstArray, srcArray, srcType)=>{ const convFunc = getDequantizeFunc(srcType); const len = srcArray.length; for(let i = 0; i < len; ++i){ dstArray[i] = convFunc(srcArray[i]); } return dstArray; }; // get accessor data, making a copy and patching in the case of a sparse accessor const getAccessorData = (gltfAccessor, bufferViews, flatten = false)=>{ const numComponents = getNumComponents(gltfAccessor.type); const dataType = getComponentDataType(gltfAccessor.componentType); if (!dataType) { return null; } let result; if (gltfAccessor.sparse) { // handle sparse data const sparse = gltfAccessor.sparse; // get indices data const indicesAccessor = { count: sparse.count, type: 'SCALAR' }; const indices = getAccessorData(Object.assign(indicesAccessor, sparse.indices), bufferViews, true); // data values data const valuesAccessor = { count: sparse.count, type: gltfAccessor.type, componentType: gltfAccessor.componentType }; const values = getAccessorData(Object.assign(valuesAccessor, sparse.values), bufferViews, true); // get base data if (gltfAccessor.hasOwnProperty('bufferView')) { const baseAccessor = { bufferView: gltfAccessor.bufferView, byteOffset: gltfAccessor.byteOffset, componentType: gltfAccessor.componentType, count: 
gltfAccessor.count, type: gltfAccessor.type }; // make a copy of the base data since we'll patch the values result = getAccessorData(baseAccessor, bufferViews, true).slice(); } else { // there is no base data, create empty 0'd out data result = new dataType(gltfAccessor.count * numComponents); } for(let i = 0; i < sparse.count; ++i){ const targetIndex = indices[i]; for(let j = 0; j < numComponents; ++j){ result[targetIndex * numComponents + j] = values[i * numComponents + j]; } } } else { if (gltfAccessor.hasOwnProperty('bufferView')) { const bufferView = bufferViews[gltfAccessor.bufferView]; if (flatten && bufferView.hasOwnProperty('byteStride')) { // flatten stridden data const bytesPerElement = numComponents * dataType.BYTES_PER_ELEMENT; const storage = new ArrayBuffer(gltfAccessor.count * bytesPerElement); const tmpArray = new Uint8Array(storage); let dstOffset = 0; for(let i = 0; i < gltfAccessor.count; ++i){ // no need to add bufferView.byteOffset because accessor takes this into account let srcOffset = (gltfAccessor.byteOffset || 0) + i * bufferView.byteStride; for(let b = 0; b < bytesPerElement; ++b){ tmpArray[dstOffset++] = bufferView[srcOffset++]; } } result = new dataType(storage); } else { result = new dataType(bufferView.buffer, bufferView.byteOffset + (gltfAccessor.byteOffset || 0), gltfAccessor.count * numComponents); } } else { result = new dataType(gltfAccessor.count * numComponents); } } return result; }; // get accessor data as (unnormalized, unquantized) Float32 data const getAccessorDataFloat32 = (gltfAccessor, bufferViews)=>{ const data = getAccessorData(gltfAccessor, bufferViews, true); if (data instanceof Float32Array || !gltfAccessor.normalized) { // if the source data is quantized (say to int16), but not normalized // then reading the values of the array is the same whether the values // are stored as float32 or int16. so probably no need to convert to // float32. 
return data; } const float32Data = new Float32Array(data.length); dequantizeArray(float32Data, data, getComponentType(gltfAccessor.componentType)); return float32Data; }; // returns a dequantized bounding box for the accessor const getAccessorBoundingBox = (gltfAccessor)=>{ let min = gltfAccessor.min; let max = gltfAccessor.max; if (!min || !max) { return null; } if (gltfAccessor.normalized) { const ctype = getComponentType(gltfAccessor.componentType); min = dequantizeArray([], min, ctype); max = dequantizeArray([], max, ctype); } return new BoundingBox(new Vec3((max[0] + min[0]) * 0.5, (max[1] + min[1]) * 0.5, (max[2] + min[2]) * 0.5), new Vec3((max[0] - min[0]) * 0.5, (max[1] - min[1]) * 0.5, (max[2] - min[2]) * 0.5)); }; const getPrimitiveType = (primitive)=>{ if (!primitive.hasOwnProperty('mode')) { return PRIMITIVE_TRIANGLES; } switch(primitive.mode){ case 0: return PRIMITIVE_POINTS; case 1: return PRIMITIVE_LINES; case 2: return PRIMITIVE_LINELOOP; case 3: return PRIMITIVE_LINESTRIP; case 4: return PRIMITIVE_TRIANGLES; case 5: return PRIMITIVE_TRISTRIP; case 6: return PRIMITIVE_TRIFAN; default: return PRIMITIVE_TRIANGLES; } }; const generateIndices = (numVertices)=>{ const dummyIndices = new Uint16Array(numVertices); for(let i = 0; i < numVertices; i++){ dummyIndices[i] = i; } return dummyIndices; }; const generateNormals = (sourceDesc, indices)=>{ // get positions const p = sourceDesc[SEMANTIC_POSITION]; if (!p || p.components !== 3) { return; } let positions; if (p.size !== p.stride) { // extract positions which aren't tightly packed const srcStride = p.stride / typedArrayTypesByteSize[p.type]; const src = new typedArrayTypes[p.type](p.buffer, p.offset, p.count * srcStride); positions = new typedArrayTypes[p.type](p.count * 3); for(let i = 0; i < p.count; ++i){ positions[i * 3 + 0] = src[i * srcStride + 0]; positions[i * 3 + 1] = src[i * srcStride + 1]; positions[i * 3 + 2] = src[i * srcStride + 2]; } } else { // position data is tightly packed so we can use it directly positions = new typedArrayTypes[p.type](p.buffer, p.offset, p.count * 3); } const numVertices = p.count; // generate indices if necessary if (!indices) { indices = generateIndices(numVertices); } // generate normals const normalsTemp = calculateNormals(positions, indices); const normals = new Float32Array(normalsTemp.length); normals.set(normalsTemp); sourceDesc[SEMANTIC_NORMAL] = { buffer: normals.buffer, size: 12, offset: 0, stride: 12, count: numVertices, components: 3, type: TYPE_FLOAT32 }; }; // given a texture, clone it // NOTE: CPU-side texture data will be shared but GPU memory will be duplicated const cloneTexture = (texture)=>{ const shallowCopyLevels = (texture)=>{ const result = []; for(let mip = 0; mip < texture._levels.length; ++mip){ let level = []; if (texture.cubemap) { for(let face = 0; face < 6; ++face){ level.push(texture._levels[mip][face]); } } else { level = texture._levels[mip]; } result.push(level); } return result; }; const result = new Texture(texture.device, texture); // duplicate texture result._levels = shallowCopyLevels(texture); // shallow copy the levels structure return result; }; // given a texture asset, clone it const cloneTextureAsset = (src)=>{ const result = new Asset(`${src.name}_clone`, src.type, src.file, src.data, src.options); result.loaded = true; result.resource = cloneTexture(src.resource); src.registry.add(result); return result; }; const createVertexBufferInternal = (device, sourceDesc)=>{ const positionDesc = sourceDesc[SEMANTIC_POSITION]; if (!positionDesc) { // 
ignore meshes without positions return null; } const numVertices = positionDesc.count; // generate vertexDesc elements const vertexDesc = []; for(const semantic in sourceDesc){ if (sourceDesc.hasOwnProperty(semantic)) { const element = { semantic: semantic, components: sourceDesc[semantic].components, type: sourceDesc[semantic].type, normalize: !!sourceDesc[semantic].normalize }; if (!VertexFormat.isElementValid(device, element)) { // WebGP does not support some formats and we need to remap it to one larger, for example int16x3 -> int16x4 // TODO: this might need the actual data changes if this element is the last one in the vertex, as it might // try to read outside of the vertex buffer. element.components++; } vertexDesc.push(element); } } // sort vertex elements by engine-ideal order vertexDesc.sort((lhs, rhs)=>{ return attributeOrder[lhs.semantic] - attributeOrder[rhs.semantic]; }); let i, j, k; let source, target, sourceOffset; const vertexFormat = new VertexFormat(device, vertexDesc); // check whether source data is correctly interleaved let isCorrectlyInterleaved = true; for(i = 0; i < vertexFormat.elements.length; ++i){ target = vertexFormat.elements[i]; source = sourceDesc[target.name]; sourceOffset = source.offset - positionDesc.offset; if (source.buffer !== positionDesc.buffer || source.stride !== target.stride || source.size !== target.size || sourceOffset !== target.offset) { isCorrectlyInterleaved = false; break; } } // create vertex buffer const vertexBuffer = new VertexBuffer(device, vertexFormat, numVertices); const vertexData = vertexBuffer.lock(); const targetArray = new Uint32Array(vertexData); let sourceArray; if (isCorrectlyInterleaved) { // copy data sourceArray = new Uint32Array(positionDesc.buffer, positionDesc.offset, numVertices * vertexBuffer.format.size / 4); targetArray.set(sourceArray); } else { let targetStride, sourceStride; // copy data and interleave for(i = 0; i < vertexBuffer.format.elements.length; ++i){ target = vertexBuffer.format.elements[i]; targetStride = target.stride / 4; source = sourceDesc[target.name]; sourceStride = source.stride / 4; // ensure we don't go beyond the end of the arraybuffer when dealing with // interlaced vertex formats sourceArray = new Uint32Array(source.buffer, source.offset, (source.count - 1) * sourceStride + (source.size + 3) / 4); let src = 0; let dst = target.offset / 4; const kend = Math.floor((source.size + 3) / 4); for(j = 0; j < numVertices; ++j){ for(k = 0; k < kend; ++k){ targetArray[dst + k] = sourceArray[src + k]; } src += sourceStride; dst += targetStride; } } } vertexBuffer.unlock(); return vertexBuffer; }; const createVertexBuffer = (device, attributes, indices, accessors, bufferViews, vertexBufferDict)=>{ // extract list of attributes to use const useAttributes = {}; const attribIds = []; for(const attrib in attributes){ if (attributes.hasOwnProperty(attrib) && gltfToEngineSemanticMap.hasOwnProperty(attrib)) { useAttributes[attrib] = attributes[attrib]; // build unique id for each attribute in format: Semantic:accessorIndex attribIds.push(`${attrib}:${attributes[attrib]}`); } } // sort unique ids and create unique vertex buffer ID attribIds.sort(); const vbKey = attribIds.join(); // return already created vertex buffer if identical let vb = vertexBufferDict[vbKey]; if (!vb) { // build vertex buffer format desc and source const sourceDesc = {}; for(const attrib in useAttributes){ const accessor = accessors[attributes[attrib]]; const accessorData = getAccessorData(accessor, bufferViews); const bufferView = 
bufferViews[accessor.bufferView]; const semantic = gltfToEngineSemanticMap[attrib]; const size = getNumComponents(accessor.type) * getComponentSizeInBytes(accessor.componentType); const stride = bufferView && bufferView.hasOwnProperty('byteStride') ? bufferView.byteStride : size; sourceDesc[semantic] = { buffer: accessorData.buffer, size: size, offset: accessorData.byteOffset, stride: stride, count: accessor.count, components: getNumComponents(accessor.type), type: getComponentType(accessor.componentType), normalize: accessor.normalized }; } // generate normals if they're missing (this should probably be a user option) if (!sourceDesc.hasOwnProperty(SEMANTIC_NORMAL)) { generateNormals(sourceDesc, indices); } // create and store it in the dictionary vb = createVertexBufferInternal(device, sourceDesc); vertexBufferDict[vbKey] = vb; } return vb; }; const createSkin = (device, gltfSkin, accessors, bufferViews, nodes, glbSkins)=>{ let i, j, bindMatrix; const joints = gltfSkin.joints; const numJoints = joints.length; const ibp = []; if (gltfSkin.hasOwnProperty('inverseBindMatrices')) { const inverseBindMatrices = gltfSkin.inverseBindMatrices; const ibmData = getAccessorData(accessors[inverseBindMatrices], bufferViews, true); const ibmValues = []; for(i = 0; i < numJoints; i++){ for(j = 0; j < 16; j++){ ibmValues[j] = ibmData[i * 16 + j]; } bindMatrix = new Mat4(); bindMatrix.set(ibmValues); ibp.push(bindMatrix); } } else { for(i = 0; i < numJoints; i++){ bindMatrix = new Mat4(); ibp.push(bindMatrix); } } const boneNames = []; for(i = 0; i < numJoints; i++){ boneNames[i] = nodes[joints[i]].name; } // create a cache key from bone names and see if we have matching skin const key = boneNames.join('#'); let skin = glbSkins.get(key); if (!skin) { // create the skin and add it to the cache skin = new Skin(device, ibp, boneNames); glbSkins.set(key, skin); } return skin; }; const createDracoMesh = (device, primitive, accessors, bufferViews, meshVariants, meshDefaultMaterials, promises)=>{ // create the mesh const result = new Mesh(device); result.aabb = getAccessorBoundingBox(accessors[primitive.attributes.POSITION]); // create vertex description const vertexDesc = []; for (const [name, index] of Object.entries(primitive.attributes)){ const accessor = accessors[index]; const semantic = gltfToEngineSemanticMap[name]; const componentType = getComponentType(accessor.componentType); vertexDesc.push({ semantic: semantic, components: getNumComponents(accessor.type), type: componentType, normalize: accessor.normalized ?? 
(semantic === SEMANTIC_COLOR && (componentType === TYPE_UINT8 || componentType === TYPE_UINT16)) }); } promises.push(new Promise((resolve, reject)=>{ // decode draco data const dracoExt = primitive.extensions.KHR_draco_mesh_compression; dracoDecode(bufferViews[dracoExt.bufferView].slice().buffer, (err, decompressedData)=>{ if (err) { console.log(err); reject(err); } else { // worker reports order of attributes as array of attribute unique_id const order = {}; for (const [name, index] of Object.entries(dracoExt.attributes)){ order[gltfToEngineSemanticMap[name]] = decompressedData.attributes.indexOf(index); } // order vertexDesc vertexDesc.sort((a, b)=>{ return order[a.semantic] - order[b.semantic]; }); // draco decompressor will generate normals if they are missing if (!primitive.attributes?.NORMAL) { vertexDesc.splice(1, 0, { semantic: 'NORMAL', components: 3, type: TYPE_FLOAT32 }); } const vertexFormat = new VertexFormat(device, vertexDesc); // create vertex buffer const numVertices = decompressedData.vertices.byteLength / vertexFormat.size; const indexFormat = numVertices <= 65535 ? INDEXFORMAT_UINT16 : INDEXFORMAT_UINT32; const numIndices = decompressedData.indices.byteLength / (numVertices <= 65535 ? 2 : 4); Debug.call(()=>{ if (numVertices !== accessors[primitive.attributes.POSITION].count) { Debug.warn('mesh has invalid vertex count'); } if (numIndices !== accessors[primitive.indices].count) { Debug.warn('mesh has invalid index count'); } }); const vertexBuffer = new VertexBuffer(device, vertexFormat, numVertices, { data: decompressedData.vertices }); const indexBuffer = new IndexBuffer(device, indexFormat, numIndices, BUFFER_STATIC, decompressedData.indices); result.vertexBuffer = vertexBuffer; result.indexBuffer[0] = indexBuffer; result.primitive[0].type = getPrimitiveType(primitive); result.primitive[0].base = 0; result.primitive[0].count = indexBuffer ? numIndices : numVertices; result.primitive[0].indexed = !!indexBuffer; resolve(); } }); })); // handle material variants if (primitive?.extensions?.KHR_materials_variants) { const variants = primitive.extensions.KHR_materials_variants; const tempMapping = {}; variants.mappings.forEach((mapping)=>{ mapping.variants.forEach((variant)=>{ tempMapping[variant] = mapping.material; }); }); meshVariants[result.id] = tempMapping; } meshDefaultMaterials[result.id] = primitive.material; return result; }; const createMesh = (device, gltfMesh, accessors, bufferViews, vertexBufferDict, meshVariants, meshDefaultMaterials, assetOptions, promises)=>{ const meshes = []; gltfMesh.primitives.forEach((primitive)=>{ if (primitive.extensions?.KHR_draco_mesh_compression) { // handle draco compressed mesh meshes.push(createDracoMesh(device, primitive, accessors, bufferViews, meshVariants, meshDefaultMaterials, promises)); } else { // handle uncompressed mesh let indices = primitive.hasOwnProperty('indices') ? 
getAccessorData(accessors[primitive.indices], bufferViews, true) : null; const vertexBuffer = createVertexBuffer(device, primitive.attributes, indices, accessors, bufferViews, vertexBufferDict); const primitiveType = getPrimitiveType(primitive); // build the mesh const mesh = new Mesh(device); mesh.vertexBuffer = vertexBuffer; mesh.primitive[0].type = primitiveType; mesh.primitive[0].base = 0; mesh.primitive[0].indexed = indices !== null; // index buffer if (indices !== null) { let indexFormat; if (indices instanceof Uint8Array) { indexFormat = INDEXFORMAT_UINT8; } else if (indices instanceof Uint16Array) { indexFormat = INDEXFORMAT_UINT16; } else { indexFormat = INDEXFORMAT_UINT32; } if (indexFormat === INDEXFORMAT_UINT8 && device.isWebGPU) { // silently convert to 16bit indexFormat = INDEXFORMAT_UINT16; indices = new Uint16Array(indices); } const indexBuffer = new IndexBuffer(device, indexFormat, indices.length, BUFFER_STATIC, indices); mesh.indexBuffer[0] = indexBuffer; mesh.primitive[0].count = indices.length; } else { mesh.primitive[0].count = vertexBuffer.numVertices; } if (primitive.hasOwnProperty('extensions') && primitive.extensions.hasOwnProperty('KHR_materials_variants')) { const variants = primitive.extensions.KHR_materials_variants; const tempMapping = {}; variants.mappings.forEach((mapping)=>{ mapping.variants.forEach((variant)=>{ tempMapping[variant] = mapping.material; }); }); meshVariants[mesh.id] = tempMapping; } meshDefaultMaterials[mesh.id] = primitive.material; let accessor = accessors[primitive.attributes.POSITION]; mesh.aabb = getAccessorBoundingBox(accessor); // morph targets if (primitive.hasOwnProperty('targets')) { const targets = []; primitive.targets.forEach((target, index)=>{ const options = {}; if (target.hasOwnProperty('POSITION')) { accessor = accessors[target.POSITION]; options.deltaPositions = getAccessorDataFloat32(accessor, bufferViews); options.aabb = getAccessorBoundingBox(accessor); } if (target.hasOwnProperty('NORMAL')) { accessor = accessors[target.NORMAL]; // NOTE: the morph targets can't currently accept quantized normals options.deltaNormals = getAccessorDataFloat32(accessor, bufferViews); } // name if specified if (gltfMesh.hasOwnProperty('extras') && gltfMesh.extras.hasOwnProperty('targetNames')) { options.name = gltfMesh.extras.targetNames[index]; } else { options.name = index.toString(10); } // default weight if specified if (gltfMesh.hasOwnProperty('weights')) { options.defaultWeight = gltfMesh.weights[index]; } options.preserveData = assetOptions.morphPreserveData; targets.push(new MorphTarget(options)); }); mesh.morph = new Morph(targets, device, { preferHighPrecision: assetOptions.morphPreferHighPrecision }); } meshes.push(mesh); } }); return meshes; }; const extractTextureTransform = (source, material, maps)=>{ let map; const texCoord = source.texCoord; if (texCoord) { for(map = 0; map < maps.length; ++map){ material[`${maps[map]}MapUv`] = texCoord; } } const zeros = [ 0, 0 ]; const ones = [ 1, 1 ]; const textureTransform = source.extensions?.KHR_texture_transform; if (textureTransform) { const offset = textureTransform.offset || zeros; const scale = textureTransform.scale || ones; const rotation = textureTransform.rotation ? 
-textureTransform.rotation * math.RAD_TO_DEG : 0; const tilingVec = new Vec2(scale[0], scale[1]); const offsetVec = new Vec2(offset[0], 1.0 - scale[1] - offset[1]); for(map = 0; map < maps.length; ++map){ material[`${maps[map]}MapTiling`] = tilingVec; material[`${maps[map]}MapOffset`] = offsetVec; material[`${maps[map]}MapRotation`] = rotation; } } }; const extensionPbrSpecGlossiness = (data, material, textures)=>{ let texture; if (data.hasOwnProperty('diffuseFactor')) { const [r, g, b, a] = data.diffuseFactor; material.diffuse.set(r, g, b).gamma(); material.opacity = a; } else { material.diffuse.set(1, 1, 1); material.opacity = 1; } if (data.hasOwnProperty('diffuseTexture')) { const diffuseTexture = data.diffuseTexture; texture = textures[diffuseTexture.index]; material.diffuseMap = texture; material.diffuseMapChannel = 'rgb'; material.opacityMap = texture; material.opacityMapChannel = 'a'; extractTextureTransform(diffuseTexture, material, [ 'diffuse', 'opacity' ]); } material.useMetalness = false; if (data.hasOwnProperty('specularFactor')) { const [r, g, b] = data.specularFactor; material.specular.set(r, g, b).gamma(); } else { material.specular.set(1, 1, 1); } if (data.hasOwnProperty('glossinessFactor')) { material.gloss = data.glossinessFactor; } else { material.gloss = 1.0; } if (data.hasOwnProperty('specularGlossinessTexture')) { const specularGlossinessTexture = data.specularGlossinessTexture; material.specularMap = material.glossMap = textures[specularGlossinessTexture.index]; material.specularMapChannel = 'rgb'; material.glossMapChannel = 'a'; extractTextureTransform(specularGlossinessTexture, material, [ 'gloss', 'metalness' ]); } }; const extensionClearCoat = (data, material, textures)=>{ if (data.hasOwnProperty('clearcoatFactor')) { material.clearCoat = data.clearcoatFactor * 0.25; // TODO: remove temporary workaround for replicating glTF clear-coat visuals } else { material.clearCoat = 0; } if (data.hasOwnProperty('clearcoatTexture')) { const clearcoatTexture = data.clearcoatTexture; material.clearCoatMap = textures[clearcoatTexture.index]; material.clearCoatMapChannel = 'r'; extractTextureTransform(clearcoatTexture, material, [ 'clearCoat' ]); } if (data.hasOwnProperty('clearcoatRoughnessFactor')) { material.clearCoatGloss = data.clearcoatRoughnessFactor; } else { material.clearCoatGloss = 0; } if (data.hasOwnProperty('clearcoatRoughnessTexture')) { const clearcoatRoughnessTexture = data.clearcoatRoughnessTexture; material.clearCoatGlossMap = textures[clearcoatRoughnessTexture.index]; material.clearCoatGlossMapChannel = 'g'; extractTextureTransform(clearcoatRoughnessTexture, material, [ 'clearCoatGloss' ]); } if (data.hasOwnProperty('clearcoatNormalTexture')) { const clearcoatNormalTexture = data.clearcoatNormalTexture; material.clearCoatNormalMap = textures[clearcoatNormalTexture.index]; extractTextureTransform(clearcoatNormalTexture, material, [ 'clearCoatNormal' ]); if (clearcoatNormalTexture.hasOwnProperty('scale')) { material.clearCoatBumpiness = clearcoatNormalTexture.scale; } else { material.clearCoatBumpiness = 1; } } material.clearCoatGlossInvert = true; }; const extensionUnlit = (data, material, textures)=>{ material.useLighting = false; // copy diffuse into emissive material.emissive.copy(material.diffuse); material.emissiveMap = material.diffuseMap; material.emissiveMapUv = material.diffuseMapUv; material.emissiveMapTiling.copy(material.diffuseMapTiling); material.emissiveMapOffset.copy(material.diffuseMapOffset); material.emissiveMapRotation = 
material.diffuseMapRotation; material.emissiveMapChannel = material.diffuseMapChannel; material.emissiveVertexColor = material.diffuseVertexColor; material.emissiveVertexColorChannel = material.diffuseVertexColorChannel; // disable lighting and skybox material.useLighting = false; material.useSkybox = false; // blank diffuse material.diffuse.set(1, 1, 1); material.diffuseMap = null; material.diffuseVertexColor = false; }; const extensionSpecular = (data, material, textures)=>{ material.useMetalnessSpecularColor = true; if (data.hasOwnProperty('specularColorTexture')) { material.specularMap = textures[data.specularColorTexture.index]; material.specularMapChannel = 'rgb'; extractTextureTransform(data.specularColorTexture, material, [ 'specular' ]); } if (data.hasOwnProperty('specularColorFactor')) { const [r, g, b] = data.specularColorFactor; material.specular.set(r, g, b).gamma(); } else { material.specular.set(1, 1, 1); } if (data.hasOwnProperty('specularFactor')) { material.specularityFactor = data.specularFactor; } else { material.specularityFactor = 1; } if (data.hasOwnProperty('specularTexture')) { material.specularityFactorMapChannel = 'a'; material.specularityFactorMap = textures[data.specularTexture.index]; extractTextureTransform(data.specularTexture, material, [ 'specularityFactor' ]); } }; const extensionIor = (data, material, textures)=>{ if (data.hasOwnProperty('ior')) { material.refractionIndex = 1.0 / data.ior; } }; const extensionDispersion = (data, material, textures)=>{ if (data.hasOwnProperty('dispersion')) { material.dispersion = data.dispersion; } }; const extensionTransmission = (data, material, textures)=>{ material.blendType = BLEND_NORMAL; material.useDynamicRefraction = true; if (data.hasOwnProperty('transmissionFactor')) { material.refraction = data.transmissionFactor; } if (data.hasOwnProperty('transmissionTexture')) { material.refractionMapChannel = 'r'; material.refractionMap = textures[data.transmissionTexture.index]; extractTextureTransform(data.transmissionTexture, material, [ 'refraction' ]); } }; const extensionSheen = (data, material, textures)=>{ material.useSheen = true; if (data.hasOwnProperty('sheenColorFactor')) { const [r, g, b] = data.sheenColorFactor; material.sheen.set(r, g, b).gamma(); } else { material.sheen.set(1, 1, 1); } if (data.hasOwnProperty('sheenColorTexture')) { material.sheenMap = textures[data.sheenColorTexture.index]; extractTextureTransform(data.sheenColorTexture, material, [ 'sheen' ]); } material.sheenGloss = data.hasOwnProperty('sheenRoughnessFactor') ? 
data.sheenRoughnessFactor : 0.0; if (data.hasOwnProperty('sheenRoughnessTexture')) { material.sheenGlossMap = textures[data.sheenRoughnessTexture.index]; material.sheenGlossMapChannel = 'a'; extractTextureTransform(data.sheenRoughnessTexture, material, [ 'sheenGloss' ]); } material.sheenGlossInvert = true; }; const extensionVolume = (data, material, textures)=>{ material.blendType = BLEND_NORMAL; material.useDynamicRefraction = true; if (data.hasOwnProperty('thicknessFactor')) { material.thickness = data.thicknessFactor; } if (data.hasOwnProperty('thicknessTexture')) { material.thicknessMap = textures[data.thicknessTexture.index]; material.thicknessMapChannel = 'g'; extractTextureTransform(data.thicknessTexture, material, [ 'thickness' ]); } if (data.hasOwnProperty('attenuationDistance')) { material.attenuationDistance = data.attenuationDistance; } if (data.hasOwnProperty('attenuationColor')) { const [r, g, b] = data.attenuationColor; material.attenuation.set(r, g, b).gamma(); } }; const extensionEmissiveStrength = (data, material, textures)=>{ if (data.hasOwnProperty('emissiveStrength')) { material.emissiveIntensity = data.emissiveStrength; } }; const extensionIridescence = (data, material, textures)=>{ material.useIridescence = true; if (data.hasOwnProperty('iridescenceFactor')) { material.iridescence = data.iridescenceFactor; } if (data.hasOwnProperty('iridescenceTexture')) { material.iridescenceMapChannel = 'r'; material.iridescenceMap = textures[data.iridescenceTexture.index]; extractTextureTransform(data.iridescenceTexture, material, [ 'iridescence' ]); } if (data.hasOwnProperty('iridescenceIor')) { material.iridescenceRefractionIndex = data.iridescenceIor; } if (data.hasOwnProperty('iridescenceThicknessMinimum')) { material.iridescenceThicknessMin = data.iridescenceThicknessMinimum; } if (data.hasOwnProperty('iridescenceThicknessMaximum')) { material.iridescenceThicknessMax = data.iridescenceThicknessMaximum; } if (data.hasOwnProperty('iridescenceThicknessTexture')) { material.iridescenceThicknessMapChannel = 'g'; material.iridescenceThicknessMap = textures[data.iridescenceThicknessTexture.index]; extractTextureTransform(data.iridescenceThicknessTexture, material, [ 'iridescenceThickness' ]); } }; const extensionAnisotropy = (data, material, textures)=>{ material.enableGGXSpecular = true; if (data.hasOwnProperty('anisotropyStrength')) { material.anisotropyIntensity = data.anisotropyStrength; } else { material.anisotropyIntensity = 0; } if (data.hasOwnProperty('anisotropyTexture')) { const anisotropyTexture = data.anisotropyTexture; material.anisotropyMap = textures[anisotropyTexture.index]; extractTextureTransform(anisotropyTexture, material, [ 'anisotropy' ]); } if (data.hasOwnProperty('anisotropyRotation')) { material.anisotropyRotation = data.anisotropyRotation * math.RAD_TO_DEG; } else { material.anisotropyRotation = 0; } }; const createMaterial = (gltfMaterial, textures)=>{ const material = new StandardMaterial(); if (gltfMaterial.hasOwnProperty('name')) { material.name = gltfMaterial.name; } // glTF doesn't define how to occlude specular material.occludeSpecular = SPECOCC_AO; material.diffuseVertexColor = true; material.specularTint = true; material.specularVertexColor = true; // Set glTF spec defaults material.specular.set(1, 1, 1); material.gloss = 1; material.glossInvert = true; material.useMetalness = true; let texture; if (gltfMaterial.hasOwnProperty('pbrMetallicRoughness')) { const pbrData = gltfMaterial.pbrMetallicRoughness; if 
(pbrData.hasOwnProperty('baseColorFactor')) { const [r, g, b, a] = pbrData.baseColorFactor; material.diffuse.set(r, g, b).gamma(); material.opacity = a; } if (pbrData.hasOwnProperty('baseColorTexture')) { const baseColorTexture = pbrData.baseColorTexture; texture = textures[baseColorTexture.index]; material.diffuseMap = texture; material.diffuseMapChannel = 'rgb'; material.opacityMap = texture; material.opacityMapChannel = 'a'; extractTextureTransform(baseColorTexture, material, [ 'diffuse', 'opacity' ]); } if (pbrData.hasOwnProperty('metallicFactor')) { material.metalness = pbrData.metallicFactor; } if (pbrData.hasOwnProperty('roughnessFactor')) { material.gloss = pbrData.roughnessFactor; } if (pbrData.hasOwnProperty('metallicRoughnessTexture')) { const metallicRoughnessTexture = pbrData.metallicRoughnessTexture; material.metalnessMap = material.glossMap = textures[metallicRoughnessTexture.index]; material.metalnessMapChannel = 'b'; material.glossMapChannel = 'g'; extractTextureTransform(metallicRoughnessTexture, material, [ 'gloss', 'metalness' ]); } } if (gltfMaterial.hasOwnProperty('normalTexture')) { const normalTexture = gltfMaterial.normalTexture; material.normalMap = textures[normalTexture.index]; extractTextureTransform(normalTexture, material, [ 'normal' ]); if (normalTexture.hasOwnProperty('scale')) { material.bumpiness = normalTexture.scale; } } if (gltfMaterial.hasOwnProperty('occlusionTexture')) { const occlusionTexture = gltfMaterial.occlusionTexture; material.aoMap = textures[occlusionTexture.index]; material.aoMapChannel = 'r'; extractTextureTransform(occlusionTexture, material, [ 'ao' ]); // TODO: support 'strength' } if (gltfMaterial.hasOwnProperty('emissiveFactor')) { const [r, g, b] = gltfMaterial.emissiveFactor; material.emissive.set(r, g, b).gamma(); } if (gltfMaterial.hasOwnProperty('emissiveTexture')) { const emissiveTexture = gltfMaterial.emissiveTexture; material.emissiveMap = textures[emissiveTexture.index]; extractTextureTransform(emissiveTexture, material, [ 'emissive' ]); } if (gltfMaterial.hasOwnProperty('alphaMode')) { switch(gltfMaterial.alphaMode){ case 'MASK': material.blendType = BLEND_NONE; if (gltfMaterial.hasOwnProperty('alphaCutoff')) { material.alphaTest = gltfMaterial.alphaCutoff; } else { material.alphaTest = 0.5; } break; case 'BLEND': material.blendType = BLEND_NORMAL; // note: by default don't write depth on semitransparent materials material.depthWrite = false; break; default: case 'OPAQUE': material.blendType = BLEND_NONE; break; } } else { material.blendType = BLEND_NONE; } if (gltfMaterial.hasOwnProperty('doubleSided')) { material.twoSidedLighting = gltfMaterial.doubleSided; material.cull = gltfMaterial.doubleSided ? 
CULLFACE_NONE : CULLFACE_BACK; } else { material.twoSidedLighting = false; material.cull = CULLFACE_BACK; } // Provide list of supported extensions and their functions const extensions = { 'KHR_materials_clearcoat': extensionClearCoat, 'KHR_materials_emissive_strength': extensionEmissiveStrength, 'KHR_materials_ior': extensionIor, 'KHR_materials_dispersion': extensionDispersion, 'KHR_materials_iridescence': extensionIridescence, 'KHR_materials_pbrSpecularGlossiness': extensionPbrSpecGlossiness, 'KHR_materials_sheen': extensionSheen, 'KHR_materials_specular': extensionSpecular, 'KHR_materials_transmission': extensionTransmission, 'KHR_materials_unlit': extensionUnlit, 'KHR_materials_volume': extensionVolume, 'KHR_materials_anisotropy': extensionAnisotropy }; // Handle extensions if (gltfMaterial.hasOwnProperty('extensions')) { for(const key in gltfMaterial.extensions){ const extensionFunc = extensions[key]; if (extensionFunc !== undefined) { extensionFunc(gltfMaterial.extensions[key], material, textures); } } } material.update(); return material; }; // create the anim structure const createAnimation = (gltfAnimation, animationIndex, gltfAccessors, bufferViews, nodes, meshes, gltfNodes)=>{ // create animation data block for the accessor const createAnimData = (gltfAccessor)=>{ return new AnimData(getNumComponents(gltfAccessor.type), getAccessorDataFloat32(gltfAccessor, bufferViews)); }; const interpMap = { 'STEP': INTERPOLATION_STEP, 'LINEAR': INTERPOLATION_LINEAR, 'CUBICSPLINE': INTERPOLATION_CUBIC }; // Input and output maps reference data by sampler input/output key. const inputMap = {}; const outputMap = {}; // The curve map stores temporary curve data by sampler index. Each curves input/output value will be resolved to an inputs/outputs array index after all samplers have been processed. // Curves and outputs that are deleted from their maps will not be included in the final AnimTrack const curveMap = {}; let outputCounter = 1; let i; // convert samplers for(i = 0; i < gltfAnimation.samplers.length; ++i){ const sampler = gltfAnimation.samplers[i]; // get input data if (!inputMap.hasOwnProperty(sampler.input)) { inputMap[sampler.input] = createAnimData(gltfAccessors[sampler.input]); } // get output data if (!outputMap.hasOwnProperty(sampler.output)) { outputMap[sampler.output] = createAnimData(gltfAccessors[sampler.output]); } const interpolation = sampler.hasOwnProperty('interpolation') && interpMap.hasOwnProperty(sampler.interpolation) ? interpMap[sampler.interpolation] : INTERPOLATION_LINEAR; // create curve const curve = { paths: [], input: sampler.input, output: sampler.output, interpolation: interpolation }; curveMap[i] = curve; } const quatArrays = []; const transformSchema = { 'translation': 'localPosition', 'rotation': 'localRotation', 'scale': 'localScale' }; const constructNodePath = (node)=>{ const path = []; while(node){ path.unshift(node.name); node = node.parent; } return path; }; // All morph targets are included in a single channel of the animation, with all targets output data interleaved with each other. // This function splits each morph target out into it a curve with its own output data, allowing us to animate each morph target independently by name. const createMorphTargetCurves =